jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| s.as_slice()),
dev_deps: true,
- features: options.flag_features.as_slice(),
+ features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| s.as_slice()),
lib_only: false,
};
let err = try!(ops::run_benches(&root, &ops,
- options.arg_args.as_slice()).map_err(|err| {
+ &options.arg_args).map_err(|err| {
CliError::from_boxed(err, 101)
}));
match err {
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| t.as_slice()),
dev_deps: false,
- features: options.flag_features.as_slice(),
+ features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| s.as_slice()),
lib_only: options.flag_lib,
return Ok(None)
}
- let (mut args, command) = match flags.arg_command.as_slice() {
+ let (mut args, command) = match &flags.arg_command[] {
"" | "help" if flags.arg_args.len() == 0 => {
config.shell().set_verbose(true);
let args = &["foo".to_string(), "-h".to_string()];
cargo::process_executed(r, &mut **config.shell());
return Ok(None)
}
- "help" if flags.arg_args[0].as_slice() == "-h" ||
- flags.arg_args[0].as_slice() == "--help" =>
+ "help" if flags.arg_args[0] == "-h" ||
+ flags.arg_args[0] == "--help" =>
(flags.arg_args, "help"),
- "help" => (vec!["-h".to_string()], flags.arg_args[0].as_slice()),
+ "help" => (vec!["-h".to_string()], &flags.arg_args[0][]),
s => (flags.arg_args.clone(), s),
};
args.insert(0, command.to_string());
args.insert(0, "foo".to_string());
macro_rules! cmd{ ($name:ident) => (
- if command.as_slice() == stringify!($name).replace("_", "-").as_slice() {
+ if command == stringify!($name).replace("_", "-") {
mod $name;
config.shell().set_verbose(true);
let r = cargo::call_main_without_stdin($name::execute, config,
$name::USAGE,
- args.as_slice(),
+ &args,
false);
cargo::process_executed(r, &mut **config.shell());
return Ok(None)
) }
each_subcommand!(cmd);
- execute_subcommand(command.as_slice(), args.as_slice(),
- &mut **config.shell());
+ execute_subcommand(&command, &args, &mut **config.shell());
Ok(None)
}
// c.lev_distance(cmd))) allows us to only make
// suggestions that have an edit distance of
// 3 or less
- .map(|c| (lev_distance(c.as_slice(), cmd), c))
+ .map(|c| (lev_distance(&c, cmd), c))
.filter(|&(d, _): &(usize, &String)| d < 4)
.min_by(|&(d, _)| d) {
Some((_, c)) => {
Did you mean `{}`?\n", closest),
None => "No such subcommand".to_string()
};
- return handle_error(CliError::new(msg, 127), shell)
+ return handle_error(CliError::new(&msg, 127), shell)
}
};
let status = Command::new(command)
}
Ok(ExitSignal(i)) => {
let msg = format!("subcommand failed with signal: {}", i);
- handle_error(CliError::new(msg, i as i32), shell)
+ handle_error(CliError::new(&msg, i as i32), shell)
}
Err(old_io::IoError{kind, ..}) if kind == old_io::FileNotFound =>
handle_error(CliError::new("No such subcommand", 127), shell),
Err(err) => handle_error(
CliError::new(
- format!("Subcommand failed to run: {}", err), 127),
+ &format!("Subcommand failed to run: {}", err), 127),
shell)
}
}
fn find_command(cmd: &str) -> Option<Path> {
let command_exe = format!("cargo-{}{}", cmd, env::consts::EXE_SUFFIX);
let dirs = list_command_directory();
- let mut command_paths = dirs.iter().map(|dir| dir.join(command_exe.as_slice()));
+ let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe));
command_paths.find(|path| path.exists())
}
jobs: options.flag_jobs,
target: None,
dev_deps: false,
- features: options.flag_features.as_slice(),
+ features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| s.as_slice()),
lib_only: false,
config.shell().set_verbose(options.flag_verbose);
let Options { flag_url: url, flag_reference: reference, .. } = options;
- let url = try!(url.as_slice().to_url().map_err(|e| {
+ let url = try!(url.to_url().map_err(|e| {
human(format!("The URL `{}` you passed was \
not a valid URL: {}", url, e))
})
let mut source = GitSource::new(&source_id, config);
try!(source.update().map_err(|e| {
- CliError::new(format!("Couldn't update {:?}: {:?}", source, e), 1)
+ CliError::new(&format!("Couldn't update {:?}: {:?}", source, e), 1)
}));
Ok(None)
}
};
- let token = token.as_slice().trim().to_string();
+ let token = token.trim().to_string();
try!(ops::registry_login(config, token).map_err(|e| {
CliError::from_boxed(e, 101)
}));
let opts = ops::NewOptions {
version_control: flag_vcs,
- path: arg_path.as_slice(),
+ path: &arg_path,
bin: flag_bin,
};
";
pub fn execute(options: Options, config: &Config) -> CliResult<Option<Package>> {
- let path = Path::new(options.flag_manifest_path.as_slice());
+ let path = Path::new(&options.flag_manifest_path);
let mut source = try!(PathSource::for_path(&path, config).map_err(|e| {
CliError::new(e.description(), 1)
}));
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|t| t.as_slice()),
dev_deps: true,
- features: options.flag_features.as_slice(),
+ features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: None,
lib_only: false,
target_kind,
name,
&compile_opts,
- options.arg_args.as_slice()).map_err(|err| {
+ &options.arg_args).map_err(|err| {
CliError::from_boxed(err, 101)
}));
match err {
..
} = options;
- ops::search(query.as_slice(), config, host)
+ ops::search(&query, config, host)
.map(|_| None)
.map_err(|err| CliError::from_boxed(err, 101))
}
jobs: options.flag_jobs,
target: options.flag_target.as_ref().map(|s| s.as_slice()),
dev_deps: true,
- features: options.flag_features.as_slice(),
+ features: &options.flag_features,
no_default_features: options.flag_no_default_features,
spec: options.flag_package.as_ref().map(|s| s.as_slice()),
lib_only: false,
};
let err = try!(ops::run_tests(&root, &ops,
- options.arg_args.as_slice()).map_err(|err| {
+ &options.arg_args).map_err(|err| {
CliError::from_boxed(err, 101)
}));
match err {
self.specified_req.as_ref().map(|s| s.as_slice())
}
- pub fn get_name(&self) -> &str {
- self.name.as_slice()
- }
+ pub fn get_name(&self) -> &str { &self.name }
/// Returns the place where this dependency must be searched for.
pub fn get_source_id(&self) -> &SourceId {
/// Returns true if the default features of the dependency are requested.
pub fn uses_default_features(&self) -> bool { self.default_features }
/// Returns the list of features that are requested by the dependency.
- pub fn get_features(&self) -> &[String] { self.features.as_slice() }
+ pub fn get_features(&self) -> &[String] { &self.features }
/// Returns true if the package (`sum`) can fulfill this dependency request.
pub fn matches(&self, sum: &Summary) -> bool {
/// Returns true if the package (`id`) can fulfill this dependency request.
pub fn matches_id(&self, id: &PackageId) -> bool {
- self.name.as_slice() == id.get_name() &&
+ self.name == id.get_name() &&
(self.only_match_name || (self.req.matches(id.get_version()) &&
&self.source_id == id.get_source_id()))
}
pub fn is_active_for_platform(&self, platform: &str) -> bool {
match self.only_for_platform {
None => true,
- Some(ref p) if p.as_slice() == platform => true,
+ Some(ref p) if *p == platform => true,
_ => false
}
}
}
}
- pub fn from_strs<S: Str>(strings: Vec<S>) -> CargoResult<Vec<LibKind>> {
- strings.iter().map(|s| LibKind::from_str(s.as_slice())).collect()
- }
-
/// Returns the argument suitable for `--crate-type` to pass to rustc.
pub fn crate_type(&self) -> &'static str {
match *self {
}
pub fn is_compile(&self) -> bool {
- self.env.as_slice() == "compile"
+ self.env == "compile"
}
pub fn is_doc(&self) -> bool {
}
pub fn get_env(&self) -> &str {
- self.env.as_slice()
+ &self.env
}
pub fn get_dest(&self) -> Option<&str> {
}
pub fn get_targets(&self) -> &[Target] {
- self.targets.as_slice()
+ &self.targets
}
pub fn get_target_dir(&self) -> &Path {
}
pub fn get_warnings(&self) -> &[String] {
- self.warnings.as_slice()
+ &self.warnings
}
pub fn get_exclude(&self) -> &[String] {
- self.exclude.as_slice()
+ &self.exclude
}
pub fn get_include(&self) -> &[String] {
- self.include.as_slice()
+ &self.include
}
pub fn get_metadata(&self) -> &ManifestMetadata { &self.metadata }
}
pub fn get_name(&self) -> &str {
- self.name.as_slice()
+ &self.name
}
pub fn get_src_path(&self) -> &Path {
}
pub fn get_packages(&self) -> &[Package] {
- self.packages.as_slice()
+ &self.packages
}
// For now, assume that the package set contains only one package with a
.map(|dep| dep.get_name())
.collect();
- graph.add(pkg.get_name(), deps.as_slice());
+ graph.add(pkg.get_name(), &deps);
}
let pkgs = match graph.sort() {
fn decode<D: Decoder>(d: &mut D) -> Result<PackageId, D::Error> {
let string: String = try!(Decodable::decode(d));
let regex = Regex::new(r"^([^ ]+) ([^ ]+) \(([^\)]+)\)$").unwrap();
- let captures = regex.captures(string.as_slice()).expect("invalid serialized PackageId");
+ let captures = regex.captures(&string).expect("invalid serialized PackageId");
let name = captures.at(1).unwrap();
let version = captures.at(2).unwrap();
}
pub fn get_name(&self) -> &str {
- self.inner.name.as_slice()
+ &self.inner.name
}
pub fn get_version(&self) -> &semver::Version {
pub fn generate_metadata(&self) -> Metadata {
let metadata = short_hash(
- &(self.inner.name.as_slice(), self.inner.version.to_string(),
+ &(&self.inner.name, self.inner.version.to_string(),
&self.inner.source_id));
let extra_filename = format!("-{}", metadata);
impl Metadata {
pub fn mix<T: Hash<SipHasher>>(&mut self, t: &T) {
- let new_metadata = short_hash(&(self.metadata.as_slice(), t));
+ let new_metadata = short_hash(&(&self.metadata, t));
self.extra_filename = format!("-{}", new_metadata);
self.metadata = new_metadata;
}
Err(..) => {}
}
if !spec.contains("://") {
- match url(format!("cargo://{}", spec).as_slice()) {
+ match url(&format!("cargo://{}", spec)) {
Ok(url) => return PackageIdSpec::from_url(url),
Err(..) => {}
}
}
}
- let mut parts = spec.as_slice().splitn(1, ':');
+ let mut parts = spec.splitn(1, ':');
let name = parts.next().unwrap();
let version = match parts.next() {
Some(version) => Some(try!(Version::parse(version).map_err(human))),
}));
match frag {
Some(fragment) => {
- let mut parts = fragment.as_slice().splitn(1, ':');
+ let mut parts = fragment.splitn(1, ':');
let name_or_version = parts.next().unwrap();
match parts.next() {
Some(part) => {
})
}
- pub fn get_name(&self) -> &str { self.name.as_slice() }
+ pub fn get_name(&self) -> &str { &self.name }
pub fn get_version(&self) -> Option<&Version> { self.version.as_ref() }
pub fn get_url(&self) -> Option<&Url> { self.url.as_ref() }
let mut printed_name = false;
match self.url {
Some(ref url) => {
- if url.scheme.as_slice() == "cargo" {
+ if url.scheme == "cargo" {
try!(write!(f, "{}/{}", url.host().unwrap(),
url.path().unwrap().connect("/")));
} else {
fn ok(spec: &str, expected: PackageIdSpec) {
let parsed = PackageIdSpec::parse(spec).unwrap();
assert_eq!(parsed, expected);
- assert_eq!(parsed.to_string().as_slice(), spec);
+ assert_eq!(parsed.to_string(), spec);
}
ok("http://crates.io/foo#1.2.3", PackageIdSpec {
impl EncodableDependency {
fn to_package_id(&self, default_source: &SourceId) -> CargoResult<PackageId> {
PackageId::new(
- self.name.as_slice(),
- self.version.as_slice(),
+ &self.name,
+ &self.version,
self.source.as_ref().unwrap_or(default_source))
}
}
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let mut out = format!("{} {}", self.name, self.version);
if let Some(ref s) = self.source {
- out.push_str(format!(" ({})", s.to_url()).as_slice());
+ out.push_str(&format!(" ({})", s.to_url()));
}
out.encode(s)
}
fn decode<D: Decoder>(d: &mut D) -> Result<EncodablePackageId, D::Error> {
let string: String = try!(Decodable::decode(d));
let regex = Regex::new(r"^([^ ]+) ([^ ]+)(?: \(([^\)]+)\))?$").unwrap();
- let captures = regex.captures(string.as_slice())
+ let captures = regex.captures(&string)
.expect("invalid serialized PackageId");
let name = captures.at(1).unwrap();
impl EncodablePackageId {
fn to_package_id(&self, default_source: &SourceId) -> CargoResult<PackageId> {
PackageId::new(
- self.name.as_slice(),
- self.version.as_slice(),
+ &self.name,
+ &self.version,
self.source.as_ref().unwrap_or(default_source))
}
}
}
for id in ids.iter() {
if version_cnt[id.get_version()] == 1 {
- msg.push_str(format!("\n {}:{}", spec.get_name(),
- id.get_version()).as_slice());
+ msg.push_str(&format!("\n {}:{}", spec.get_name(),
+ id.get_version()));
} else {
- msg.push_str(format!("\n {}",
- PackageIdSpec::from_package_id(*id))
- .as_slice());
+ msg.push_str(&format!("\n {}",
+ PackageIdSpec::from_package_id(*id)));
}
}
}
cur: usize) -> CargoResult<CargoResult<Box<Context>>> {
if cur == deps.len() { return Ok(Ok(cx)) }
let (dep, ref candidates, ref features) = deps[cur];
- let method = Method::Required(false, features.as_slice(),
+ let method = Method::Required(false, &features,
dep.uses_default_features(), platform);
let key = (dep.get_name().to_string(), dep.get_source_id().clone());
for edge in edges {
if edge != v.get_package_id() { continue }
- msg.push_str(format!("\n version {} in use by {}",
- v.get_version(), edge).as_slice());
+ msg.push_str(&format!("\n version {} in use by {}",
+ v.get_version(), edge));
continue 'outer;
}
}
- msg.push_str(format!("\n version {} in use by ??",
- v.get_version()).as_slice());
+ msg.push_str(&format!("\n version {} in use by ??",
+ v.get_version()));
}
msg.push_str(&format!("\n possible versions to select: {}",
msg.push_str("\nversions found: ");
for (i, c) in candidates.iter().take(3).enumerate() {
if i != 0 { msg.push_str(", "); }
- msg.push_str(c.get_version().to_string().as_slice());
+ msg.push_str(&c.get_version().to_string());
}
if candidates.len() > 3 {
msg.push_str(", ...");
let deps = deps.filter(|d| {
match method {
Method::Required(_, _, _, Some(ref platform)) => {
- d.is_active_for_platform(platform.as_slice())
+ d.is_active_for_platform(platform)
},
_ => true
}
let mut base = feature_deps.remove(dep.get_name()).unwrap_or(vec![]);
for feature in dep.get_features().iter() {
base.push(feature.clone());
- if feature.as_slice().contains("/") {
+ if feature.contains("/") {
return Err(human(format!("features in dependencies \
cannot enable features in \
other dependencies: `{}`",
match method {
Method::Everything => {
for key in s.get_features().keys() {
- try!(add_feature(s, key.as_slice(), &mut deps, &mut used,
- &mut visited));
+ try!(add_feature(s, key, &mut deps, &mut used, &mut visited));
}
for dep in s.get_dependencies().iter().filter(|d| d.is_optional()) {
try!(add_feature(s, dep.get_name(), &mut deps, &mut used,
}
Method::Required(_, requested_features, _, _) => {
for feat in requested_features.iter() {
- try!(add_feature(s, feat.as_slice(), &mut deps, &mut used,
- &mut visited));
+ try!(add_feature(s, feat, &mut deps, &mut used, &mut visited));
}
}
}
match s.get_features().get(feat) {
Some(recursive) => {
for f in recursive.iter() {
- try!(add_feature(s, f.as_slice(), deps, used,
+ try!(add_feature(s, f, deps, used,
visited));
}
}
pub fn say<T: ToString>(&mut self, message: T, color: Color) -> IoResult<()> {
try!(self.reset());
if color != BLACK { try!(self.fg(color)); }
- try!(self.write_line(message.to_string().as_slice()));
+ try!(self.write_line(&message.to_string()));
try!(self.reset());
try!(self.flush());
Ok(())
try!(self.reset());
if color != BLACK { try!(self.fg(color)); }
if self.supports_attr(Attr::Bold) { try!(self.attr(Attr::Bold)); }
- try!(self.write_str(format!("{:>12}", status).as_slice()));
+ try!(self.write_str(&format!("{:>12}", status)));
try!(self.reset());
- try!(self.write_line(format!(" {}", message).as_slice()));
+ try!(self.write_line(&format!(" {}", message)));
try!(self.flush());
Ok(())
}
/// 656c58fb7c5ef5f12bc747f".to_string());
/// ```
pub fn from_url(string: String) -> SourceId {
- let mut parts = string.as_slice().splitn(1, '+');
+ let mut parts = string.splitn(1, '+');
let kind = parts.next().unwrap();
let url = parts.next().unwrap();
pub fn to_ref_string(&self) -> Option<String> {
match *self {
GitReference::Branch(ref s) => {
- if s.as_slice() == "master" {
+ if *s == "master" {
None
} else {
Some(format!("branch={}", s))
let mut ret = Vec::new();
for source in self.sources.iter() {
- ret.push_all(try!(source.get(packages)).as_slice());
+ ret.extend(try!(source.get(packages)).into_iter());
}
Ok(ret)
}
for (feature, list) in features.iter() {
for dep in list.iter() {
- let mut parts = dep.as_slice().splitn(1, '/');
+ let mut parts = dep.splitn(1, '/');
let dep = parts.next().unwrap();
let is_reexport = parts.next().is_some();
if !is_reexport && features.get(dep).is_some() { continue }
}
pub fn get_dependencies(&self) -> &[Dependency] {
- self.dependencies.as_slice()
+ &self.dependencies
}
pub fn get_features(&self) -> &HashMap<String, Vec<String>> {
CliError::new("Standard in did not exist or was not UTF-8", 1)
}));
- let json = try!(Json::from_str(input.as_slice()).map_err(|_| {
+ let json = try!(Json::from_str(&input).map_err(|_| {
CliError::new("Could not parse standard in as JSON", 1)
}));
let mut decoder = json::Decoder::new(json);
try!(rm_rf(&layout.native(&pkg)));
try!(rm_rf(&layout.fingerprint(&pkg)));
for filename in try!(cx.target_filenames(target)).iter() {
- let filename = filename.as_slice();
try!(rm_rf(&layout.dest().join(filename)));
try!(rm_rf(&layout.deps().join(filename)));
}
let target = target.map(|s| s.to_string());
let features = features.iter().flat_map(|s| {
- s.as_slice().split(' ')
+ s.split(' ')
}).map(|s| s.to_string()).collect::<Vec<String>>();
if spec.is_some() && (no_default_features || features.len() > 0) {
try!(registry.add_overrides(override_ids));
let platform = target.as_ref().map(|e| e.as_slice()).or(Some(rustc_host.as_slice()));
- let method = Method::Required(dev_deps, features.as_slice(),
+ let method = Method::Required(dev_deps, &features,
!no_default_features, platform);
let resolved_with_overrides =
try!(ops::resolve_with_previous(&mut registry, package, method,
let req: Vec<PackageId> = resolved_with_overrides.iter().map(|r| {
r.clone()
}).collect();
- let packages = try!(registry.get(req.as_slice()).chain_error(|| {
+ let packages = try!(registry.get(&req).chain_error(|| {
human("Unable to get packages from source")
}));
let _p = profile::start("compiling");
let lib_overrides = try!(scrape_build_config(config, jobs, target));
- try!(ops::compile_targets(env.as_slice(), targets.as_slice(), to_build,
- &PackageSet::new(packages.as_slice()),
+ try!(ops::compile_targets(&env, &targets, to_build,
+ &PackageSet::new(&packages),
&resolve_with_overrides, &sources,
config, lib_overrides, exec_engine.clone()))
};
// The path listed next to the string is the config file in which the
// key was located, so we want to pop off the `.cargo/config` component
// to get the directory containing the `.cargo` folder.
- p.dir_path().dir_path().join(s.as_slice())
+ p.dir_path().dir_path().join(s)
}).filter(|p| {
// Make sure we don't override the local package, even if it's in the
// list of override paths.
if k == "rustc-flags" {
let whence = format!("in `{}` (in {:?})", key, path);
let (paths, links) = try!(
- BuildOutput::parse_rustc_flags(v.as_slice(), &whence[])
+ BuildOutput::parse_rustc_flags(&v, &whence[])
);
output.library_paths.extend(paths.into_iter());
output.library_links.extend(links.into_iter());
"none" => VersionControl::NoVcs,
n => {
let err = format!("could not decode '{}' as version control", n);
- return Err(d.error(err.as_slice()));
+ return Err(d.error(&err));
}
})
}
for c in name.chars() {
if c.is_alphanumeric() { continue }
if c == '_' || c == '-' { continue }
- return Err(human(format!("Invalid character `{}` in crate name: `{}`",
- c, name).as_slice()));
+ return Err(human(format!("Invalid character `{}` in crate name: `{}`",
+ c, name)));
}
mk(config, &path, name, &opts).chain_error(|| {
human(format!("Failed to create project `{}` at `{}`",
(None, None, name, None) => name,
};
- try!(File::create(&path.join("Cargo.toml")).write_str(format!(
+ try!(File::create(&path.join("Cargo.toml")).write_str(&format!(
r#"[package]
name = "{}"
version = "0.0.1"
authors = ["{}"]
-"#, name, author).as_slice()));
+"#, name, author)));
try!(fs::mkdir(&path.join("src"), old_io::USER_RWX));
};
let email = git_config.and_then(|g| g.get_str("user.email").ok());
- let name = name.as_slice().trim().to_string();
- let email = email.map(|s| s.as_slice().trim().to_string());
+ let name = name.trim().to_string();
+ let email = email.map(|s| s.trim().to_string());
Ok((name, email))
}
if !missing.is_empty() {
let mut things = missing[..missing.len() - 1].connect(", ");
- // things will be empty if and only if length == 1 (i.e. the only case to have no `or`).
+ // things will be empty if and only if length == 1 (i.e. the only case
+ // to have no `or`).
if !things.is_empty() {
things.push_str(" or ");
}
- things.push_str(missing.last().unwrap().as_slice());
+ things.push_str(&missing.last().unwrap());
try!(config.shell().warn(
- format!("warning: manifest has no {things}. \
+ &format!("warning: manifest has no {things}. \
See http://doc.crates.io/manifest.html#package-metadata for more info.",
- things = things).as_slice()))
+ things = things)))
}
Ok(())
}
}));
let mut file = try!(File::open(file));
try!(config.shell().verbose(|shell| {
- shell.status("Archiving", relative.as_slice())
+ shell.status("Archiving", &relative)
}));
let path = format!("{}-{}{}{}", pkg.get_name(),
pkg.get_version(), old_path::SEP, relative);
- try!(ar.append(path.as_slice(), &mut file).chain_error(|| {
+ try!(ar.append(&path, &mut file).chain_error(|| {
internal(format!("could not archive source file `{}`", relative))
}));
}
let layout = project_layout(&path.dir_path());
let (manifest, nested) =
- try!(read_manifest(data.as_slice(), layout, source_id, config));
+ try!(read_manifest(&data, layout, source_id, config));
Ok((Package::new(manifest, path, source_id), nested))
}
TargetKind::Example => a.is_example(),
TargetKind::Lib(_) => false,
};
- let matches_name = name.as_ref().map_or(true, |n| n.as_slice() == a.get_name());
+ let matches_name = name.as_ref().map_or(true, |n| *n == a.get_name());
matches_kind && matches_name && a.get_profile().get_env() == env &&
!a.get_profile().is_custom_build()
});
}
search_path.push(self.root_output.clone());
search_path.push(self.deps_output.clone());
- let search_path = try!(util::join_paths(search_path.as_slice(),
+ let search_path = try!(util::join_paths(&search_path,
DynamicLibrary::envvar()));
let mut cmd = try!(CommandPrototype::new(cmd)).env(
- DynamicLibrary::envvar(), Some(search_path.as_slice()));
+ DynamicLibrary::envvar(), Some(&search_path));
for (k, v) in self.extra_env.iter() {
- cmd = cmd.env(k.as_slice(), v.as_ref().map(|s| s.as_slice()));
+ cmd = cmd.env(k, v.as_ref());
}
Ok(cmd.env("CARGO_MANIFEST_DIR", Some(pkg.get_manifest_path().dir_path()))
for (i, x) in v.pre.iter().enumerate() {
if i != 0 { ret.push('.') };
- ret.push_str(x.to_string().as_slice());
+ ret.push_str(&x.to_string());
}
Some(ret)
};
let output = try!(process.exec_with_output());
- let error = str::from_utf8(output.error.as_slice()).unwrap();
- let output = str::from_utf8(output.output.as_slice()).unwrap();
+ let error = str::from_utf8(&output.error).unwrap();
+ let output = str::from_utf8(&output.output).unwrap();
let mut lines = output.lines();
let nodylib = Regex::new("unsupported crate type.*dylib").unwrap();
let nobin = Regex::new("unsupported crate type.*bin").unwrap();
match *pair {
None => return Err(human(format!("dylib outputs are not supported \
for {}", triple))),
- Some((ref s1, ref s2)) => Ok((s1.as_slice(), s2.as_slice())),
+ Some((ref s1, ref s2)) => Ok((s1, s2)),
}
}
/// Return the target triple which this context is targeting.
pub fn target_triple(&self) -> &str {
- self.target_triple.as_slice()
+ &self.target_triple
}
/// Return the exact filename of the target.
target.get_profile().is_test() {
ret.push(format!("{}{}", stem,
if target.get_profile().is_for_host() {
- self.host_exe.as_slice()
+ &self.host_exe
} else {
- self.target_exe.as_slice()
+ &self.target_exe
}));
} else {
if target.is_dylib() {
match cx.resolve.features(pkg.get_package_id()) {
Some(features) => {
for feat in features.iter() {
- p = p.env(format!("CARGO_FEATURE_{}",
- super::envify(feat.as_slice())).as_slice(),
+ p = p.env(&format!("CARGO_FEATURE_{}", super::envify(feat)),
Some("1"));
}
}
for &(ref name, ref id) in lib_deps.iter() {
let data = &build_state[(id.clone(), kind)].metadata;
for &(ref key, ref value) in data.iter() {
- p = p.env(format!("DEP_{}_{}",
- super::envify(name.as_slice()),
- super::envify(key.as_slice())).as_slice(),
- Some(value.as_slice()));
+ p = p.env(&format!("DEP_{}_{}", super::envify(name),
+ super::envify(key)),
+ Some(value));
}
}
p = try!(super::add_plugin_deps(p, &build_state, plugin_deps));
// This is also the location where we provide feedback into the build
// state informing what variables were discovered via our script as
// well.
- let output = try!(str::from_utf8(output.output.as_slice()).chain_error(|| {
+ let output = try!(str::from_utf8(&output.output).chain_error(|| {
human("build script output was not valid utf-8")
}));
- let parsed_output = try!(BuildOutput::parse(output, pkg_name.as_slice()));
+ let parsed_output = try!(BuildOutput::parse(output, &pkg_name));
build_state.insert(id, req, parsed_output);
try!(File::create(&build_output.dir_path().join("output"))
human(format!("failed to read cached build command output: {}", e))
}));
let contents = try!(f.read_to_string());
- let output = try!(BuildOutput::parse(contents.as_slice(),
- pkg_name.as_slice()));
+ let output = try!(BuildOutput::parse(&contents, &pkg_name));
build_state.insert(id, req, output);
fresh.call(tx)
};
if key == "rustc-flags" {
- let whence = whence.as_slice();
let (libs, links) = try!(
- BuildOutput::parse_rustc_flags(value, whence)
+ BuildOutput::parse_rustc_flags(value, &whence)
);
library_links.extend(links.into_iter());
library_paths.extend(libs.into_iter());
}
pub fn get_args(&self) -> &[CString] {
- self.args.as_slice()
+ &self.args
}
pub fn cwd(mut self, path: Path) -> CommandPrototype {
builder = builder.arg(arg);
}
for (key, val) in self.env.into_iter() {
- builder = builder.env(key.as_slice(), val.as_ref());
+ builder = builder.env(&key, val.as_ref());
}
builder = builder.cwd(self.cwd);
let fingerprint = try!(fingerprint.resolve(true).chain_error(|| {
internal("failed to resolve a pending fingerprint")
}));
- try!(File::create(&loc).write_str(fingerprint.as_slice()));
+ try!(File::create(&loc).write_str(&fingerprint));
Ok(())
});
trace!("old fingerprint: {}", old_fingerprint);
trace!("new fingerprint: {}", new_fingerprint);
- Ok(old_fingerprint.as_slice() == new_fingerprint)
+ Ok(old_fingerprint == new_fingerprint)
}
fn calculate_target_mtime(dep_info: &Path) -> CargoResult<Option<u64>> {
Some(Ok(line)) => line,
_ => return Ok(None),
};
- let line = line.as_slice();
let mtime = try!(fs::stat(dep_info)).modified;
let pos = try!(line.find_str(": ").chain_error(|| {
internal(format!("dep-info not in an understood format: {}",
Some(s) => s.to_string(),
None => break,
};
- while file.as_slice().ends_with("\\") {
+ while file.ends_with("\\") {
file.pop();
file.push(' ');
file.push_str(deps.next().unwrap())
}
- match fs::stat(&cwd.join(file.as_slice())) {
+ match fs::stat(&cwd.join(&file)) {
Ok(stat) if stat.modified <= mtime => {}
Ok(stat) => {
info!("stale: {} -- {} vs {}", file, stat.modified, mtime);
internal("rustc -v didn't return utf8 output")
}));
let triple = {
- let triple = output.as_slice().lines().filter(|l| {
+ let triple = output.lines().filter(|l| {
l.starts_with("host: ")
}).map(|l| &l[6..]).next();
let triple = try!(triple.chain_error(|| {
internal("rustc -v didn't return utf8 output")
}));
let triple = {
- let triple = output.as_slice().lines().filter(|l| {
+ let triple = output.lines().filter(|l| {
l.starts_with("host: ")
}).map(|l| &l[6..]).next();
let triple = try!(triple.chain_error(|| {
}
let compiled = compiled.contains(dep.get_package_id());
- try!(compile(targets.as_slice(), dep, compiled, &mut cx, &mut queue));
+ try!(compile(&targets, dep, compiled, &mut cx, &mut queue));
}
try!(compile(targets, pkg, true, &mut cx, &mut queue));
}
if pass_l_flag && id == *current_id {
for name in output.library_links.iter() {
- rustc = rustc.arg("-l").arg(name.as_slice());
+ rustc = rustc.arg("-l").arg(name);
}
}
}
cx: &Context, req: Platform)
-> CargoResult<Vec<(CommandPrototype, Kind)>> {
let base = try!(process(CommandType::Rustc, package, target, cx));
- let base = build_base_args(cx, base, package, target, crate_types.as_slice());
+ let base = build_base_args(cx, base, package, target, &crate_types);
let target_cmd = build_plugin_args(base.clone(), cx, package, target, Kind::Target);
let plugin_cmd = build_plugin_args(base, cx, package, target, Kind::Host);
v.push_all(layout.root().as_vec());
v.push(old_path::SEP_BYTE);
v.push_all(filename.as_bytes());
- cmd = cmd.arg("--extern").arg(v.as_slice());
+ cmd = cmd.arg("--extern").arg(&v);
}
return Ok(cmd);
}
// We want to use the same environment and such as normal processes, but we
// want to override the dylib search path with the one we just calculated.
- let search_path = try!(join_paths(search_path.as_slice(),
- DynamicLibrary::envvar()));
+ let search_path = try!(join_paths(&search_path, DynamicLibrary::envvar()));
Ok(try!(cx.compilation.process(cmd, pkg))
- .env(DynamicLibrary::envvar(), Some(search_path.as_slice())))
+ .env(DynamicLibrary::envvar(), Some(&search_path)))
}
fn each_dep<'a, F>(pkg: &Package, cx: &'a Context, mut f: F)
let target_name = options.name;
let tests_to_run = compile.tests.iter().filter(|&&(ref test_name, _)| {
- target_name.map_or(true, |target_name| target_name == test_name.as_slice())
+ target_name.map_or(true, |target_name| target_name == *test_name)
});
let cwd = config.cwd();
let mut arg = pkg.get_name().as_bytes().to_vec();
arg.push(b'=');
arg.push_all(lib.as_vec());
- p = p.arg("--extern").arg(arg.as_slice());
+ p = p.arg("--extern").arg(arg);
}
}
let mut args = args.to_vec();
args.push("--bench".to_string());
- run_tests(manifest_path, options, args.as_slice())
+ run_tests(manifest_path, options, &args)
}
let s = try!(f.read_to_string());
- let table = toml::Value::Table(try!(cargo_toml::parse(s.as_slice(), path)));
+ let table = toml::Value::Table(try!(cargo_toml::parse(&s, path)));
let mut d = toml::Decoder::new(table);
let v: resolver::EncodableResolve = try!(Decodable::decode(&mut d));
Ok(Some(try!(v.to_resolve(sid))))
match e.toml.get(&"metadata".to_string()) {
Some(metadata) => {
out.push_str("[metadata]\n");
- out.push_str(metadata.to_string().as_slice());
+ out.push_str(&metadata.to_string());
}
None => {}
}
- try!(File::create(dst).write_str(out.as_slice()));
+ try!(File::create(dst).write_str(&out));
Ok(())
}
fn emit_package(dep: &toml::Table, out: &mut String) {
- out.push_str(format!("name = {}\n", lookup(dep, "name")).as_slice());
- out.push_str(format!("version = {}\n", lookup(dep, "version")).as_slice());
+ out.push_str(&format!("name = {}\n", lookup(dep, "name")));
+ out.push_str(&format!("version = {}\n", lookup(dep, "version")));
- if dep.contains_key(&"source".to_string()) {
- out.push_str(format!("source = {}\n", lookup(dep, "source")).as_slice());
+ if dep.contains_key("source") {
+ out.push_str(&format!("source = {}\n", lookup(dep, "source")));
}
- if let Some(ref s) = dep.get(&"dependencies".to_string()) {
+ if let Some(ref s) = dep.get("dependencies") {
let slice = Value::as_slice(*s).unwrap();
if !slice.is_empty() {
out.push_str("dependencies = [\n");
for child in slice.iter() {
- out.push_str(format!(" {},\n", child).as_slice());
+ out.push_str(&format!(" {},\n", child));
}
out.push_str("]\n");
}
fn lookup<'a>(table: &'a toml::Table, key: &str) -> &'a toml::Value {
- table.get(&key.to_string()).expect(format!("Didn't find {}", key).as_slice())
+ table.get(key).expect(&format!("Didn't find {}", key))
}
} = *manifest.get_metadata();
let readme = match *readme {
Some(ref readme) => {
- let path = pkg.get_root().join(readme.as_slice());
+ let path = pkg.get_root().join(readme);
Some(try!(File::open(&path).read_to_string().chain_error(|| {
human("failed to read the specified README")
})))
} = try!(registry_configuration(config));
let token = token.or(token_config);
let index = index.or(index_config).unwrap_or(RegistrySource::default_url());
- let index = try!(index.as_slice().to_url().map_err(human));
+ let index = try!(index.to_url().map_err(human));
let sid = SourceId::for_registry(&index);
let api_host = {
let mut src = RegistrySource::new(&sid, config);
let v = v.iter().map(|s| s.as_slice()).collect::<Vec<_>>();
try!(config.shell().status("Owner", format!("adding `{:#?}` to `{}`",
v, name)));
- try!(registry.add_owners(name.as_slice(), v.as_slice()).map_err(|e| {
+ try!(registry.add_owners(&name, &v).map_err(|e| {
human(format!("failed to add owners: {}", e))
}));
}
let v = v.iter().map(|s| s.as_slice()).collect::<Vec<_>>();
try!(config.shell().status("Owner", format!("removing `{:?}` from `{}`",
v, name)));
- try!(registry.remove_owners(name.as_slice(), v.as_slice()).map_err(|e| {
+ try!(registry.remove_owners(&name, &v).map_err(|e| {
- human(format!("failed to add owners: {}", e))
+ human(format!("failed to remove owners: {}", e))
}));
}
}
if opts.list {
- let owners = try!(registry.list_owners(name.as_slice()).map_err(|e| {
+ let owners = try!(registry.list_owners(&name).map_err(|e| {
human(format!("failed to list owners: {}", e))
}));
for owner in owners.iter() {
if undo {
try!(config.shell().status("Unyank", format!("{}:{}", name, version)));
- try!(registry.unyank(name.as_slice(), version.as_slice()).map_err(|e| {
+ try!(registry.unyank(&name, &version).map_err(|e| {
human(format!("failed to undo a yank: {}", e))
}));
} else {
try!(config.shell().status("Yank", format!("{}:{}", name, version)));
- try!(registry.yank(name.as_slice(), version.as_slice()).map_err(|e| {
+ try!(registry.yank(&name, &version).map_err(|e| {
human(format!("failed to yank: {}", e))
}));
}
.map(|krate| (
format!("{} ({})", krate.name, krate.max_version),
krate.description.as_ref().map(|desc|
- truncate_with_ellipsis(desc.replace("\n", " ").as_slice(), 128))
+ truncate_with_ellipsis(&desc.replace("\n", " "), 128))
))
.collect::<Vec<_>>();
let description_margin = list_items.iter()
Some(desc) => {
let space = repeat(' ').take(description_margin - name.len())
.collect::<String>();
- name.to_string() + space.as_slice() + desc.as_slice()
+ name.to_string() + &space + &desc
}
None => name
};
let remote = GitRemote::new(source_id.get_url());
let ident = ident(source_id.get_url());
- let db_path = config.git_db_path()
- .join(ident.as_slice());
+ let db_path = config.git_db_path().join(&ident);
let reference_path = match *reference {
GitReference::Branch(ref s) |
let ident = url.path().unwrap_or(&[])
.last().map(|a| a.clone()).unwrap_or(String::new());
- let ident = if ident.as_slice() == "" {
+ let ident = if ident == "" {
"_empty".to_string()
} else {
ident
rel.default_port = Some(443);
let path = mem::replace(&mut rel.path, Vec::new());
rel.path = path.into_iter().map(|s| {
- s.as_slice().chars().map(|c| c.to_lowercase()).collect()
+ s.chars().map(|c| c.to_lowercase()).collect()
}).collect();
}
_ => {}
};
if needs_chopping {
let last = rel.path.pop().unwrap();
- let last = last.as_slice();
rel.path.push(last[..last.len() - 4].to_string())
}
}
#[test]
pub fn test_url_to_path_ident_with_path() {
let ident = ident(&url("https://github.com/carlhuda/cargo"));
- assert_eq!(ident.as_slice(), "cargo-51d6ede913e3e1d5");
+ assert_eq!(ident, "cargo-51d6ede913e3e1d5");
}
#[test]
pub fn test_url_to_path_ident_without_path() {
let ident = ident(&url("https://github.com"));
- assert_eq!(ident.as_slice(), "_empty-eba8a1ec0f6907fb");
+ assert_eq!(ident, "_empty-eba8a1ec0f6907fb");
}
#[test]
// Create a local anonymous remote in the repository to fetch the url
let url = self.url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
- fetch(dst, url.as_slice(), refspec)
+ fetch(dst, &url, refspec)
}
fn clone_into(&self, dst: &Path) -> CargoResult<git2::Repository> {
}
try!(mkdir_recursive(dst, USER_DIR));
let repo = try!(git2::Repository::init_bare(dst));
- try!(fetch(&repo, url.as_slice(), "refs/heads/*:refs/heads/*"));
+ try!(fetch(&repo, &url, "refs/heads/*:refs/heads/*"));
Ok(repo)
}
}
GitReference::Tag(ref s) => {
try!((|:| {
let refname = format!("refs/tags/{}", s);
- let id = try!(self.repo.refname_to_id(refname.as_slice()));
+ let id = try!(self.repo.refname_to_id(&refname));
let obj = try!(self.repo.find_object(id, None));
let obj = try!(obj.peel(ObjectType::Commit));
Ok(obj.id())
}
GitReference::Branch(ref s) => {
try!((|:| {
- let b = try!(self.repo.find_branch(s.as_slice(),
- git2::BranchType::Local));
+ let b = try!(self.repo.find_branch(s, git2::BranchType::Local));
b.get().target().chain_error(|| {
human(format!("branch `{}` did not have a target", s))
})
}))
}
GitReference::Rev(ref s) => {
- let obj = try!(self.repo.revparse_single(s.as_slice()));
+ let obj = try!(self.repo.revparse_single(s));
obj.id()
}
};
Ok(GitRevision(id))
}
- pub fn has_ref<S: Str>(&self, reference: S) -> CargoResult<()> {
- try!(self.repo.revparse_single(reference.as_slice()));
+ pub fn has_ref(&self, reference: &str) -> CargoResult<()> {
+ try!(self.repo.revparse_single(reference));
Ok(())
}
}
let url = try!(source.to_url().map_err(human));
let url = url.to_string();
- let repo = try!(git2::Repository::clone(url.as_slice(),
- into).chain_error(|| {
+ let repo = try!(git2::Repository::clone(&url, into).chain_error(|| {
internal(format!("failed to clone {} into {}", source.display(),
into.display()))
}));
let url = try!(self.database.path.to_url().map_err(human));
let url = url.to_string();
let refspec = "refs/heads/*:refs/heads/*";
- try!(fetch(&self.repo, url.as_slice(), refspec));
+ try!(fetch(&self.repo, &url, refspec));
Ok(())
}
let user = username.map(|s| s.to_string())
.or_else(|| cred_helper.username.clone())
.unwrap_or("git".to_string());
- git2::Cred::ssh_key_from_agent(user.as_slice())
+ git2::Cred::ssh_key_from_agent(&user)
} else if allowed.contains(git2::USER_PASS_PLAINTEXT) {
git2::Cred::credential_helper(cfg, url, username)
} else if allowed.contains(git2::DEFAULT) {
with_authentication(url, &try!(repo.config()), |f| {
let mut cb = git2::RemoteCallbacks::new();
cb.credentials(|a, b, c| f(a, b, c));
- let mut remote = try!(repo.remote_anonymous(url.as_slice(),
- Some(refspec)));
+ let mut remote = try!(repo.remote_anonymous(&url, Some(refspec)));
try!(remote.add_fetch("refs/tags/*:refs/tags/*"));
remote.set_callbacks(&mut cb);
try!(remote.fetch(&["refs/tags/*:refs/tags/*", refspec], None, None));
let root = pkg.get_manifest_path().dir_path();
let parse = |&: p: &String| {
- Pattern::new(p.as_slice()).map_err(|e| {
+ Pattern::new(p).map_err(|e| {
human(format!("could not parse pattern `{}`: {}", p, e))
})
};
let mut ret = Vec::new();
'outer: for entry in index.iter() {
- let fname = entry.path.as_slice();
+ let fname = &entry.path[];
let file_path = root.join(fname);
// Filter out files outside this package.
let ident = source_id.get_url().host().unwrap().to_string();
let part = format!("{}-{}", ident, hash);
RegistrySource {
- checkout_path: config.registry_index_path().join(part.as_slice()),
- cache_path: config.registry_cache_path().join(part.as_slice()),
- src_path: config.registry_source_path().join(part.as_slice()),
+ checkout_path: config.registry_index_path().join(&part),
+ cache_path: config.registry_cache_path().join(&part),
+ src_path: config.registry_source_path().join(&part),
config: config,
source_id: source_id.clone(),
handle: None,
pub fn url(config: &Config) -> CargoResult<Url> {
let config = try!(ops::registry_configuration(config));
let url = config.index.unwrap_or(DEFAULT.to_string());
- url.as_slice().to_url().map_err(human)
+ url.to_url().map_err(human)
}
/// Get the default url for the registry
pub fn config(&self) -> CargoResult<RegistryConfig> {
let mut f = try!(File::open(&self.checkout_path.join("config.json")));
let contents = try!(f.read_to_string());
- let config = try!(json::decode(contents.as_slice()));
+ let config = try!(json::decode(&contents));
Ok(config)
}
state.update(resp.get_body());
state.finish()
};
- if actual.as_slice().to_hex() != *expected {
+ if actual.to_hex() != *expected {
return Err(human(format!("Failed to verify the checksum of `{}`",
pkg)))
}
Ok(mut f) => {
let contents = try!(f.read_to_string());
let ret: CargoResult<Vec<(Summary, bool)>>;
- ret = contents.as_slice().lines().filter(|l| l.trim().len() > 0)
+ ret = contents.lines().filter(|l| l.trim().len() > 0)
.map(|l| self.parse_registry_package(l))
.collect();
try!(ret.chain_error(|| {
let RegistryPackage {
name, vers, cksum, deps, features, yanked
} = try!(json::decode::<RegistryPackage>(line));
- let pkgid = try!(PackageId::new(name.as_slice(),
- vers.as_slice(),
- &self.source_id));
+ let pkgid = try!(PackageId::new(&name, &vers, &self.source_id));
let deps: CargoResult<Vec<Dependency>> = deps.into_iter().map(|dep| {
self.parse_registry_dependency(dep)
}).collect();
name, req, features, optional, default_features, target, kind
} = dep;
- let dep = try!(Dependency::parse(name.as_slice(), Some(req.as_slice()),
+ let dep = try!(Dependency::parse(&name, Some(&req),
&self.source_id));
- let kind = match kind.as_ref().map(|s| s.as_slice()).unwrap_or("") {
+ let kind = match kind.as_ref().map(|s| &s[]).unwrap_or("") {
"dev" => Kind::Development,
// git fetch origin
let url = self.source_id.get_url().to_string();
let refspec = "refs/heads/*:refs/remotes/origin/*";
- try!(git::fetch(&repo, url.as_slice(), refspec).chain_error(|| {
+ try!(git::fetch(&repo, &url, refspec).chain_error(|| {
internal(format!("failed to fetch `{}`", url))
}));
fn download(&mut self, packages: &[PackageId]) -> CargoResult<()> {
let config = try!(self.config());
- let url = try!(config.dl.as_slice().to_url().map_err(internal));
+ let url = try!(config.dl.to_url().map_err(internal));
for package in packages.iter() {
if self.source_id != *package.get_source_id() { continue }
}
/// Return the output of `rustc -v verbose`
- pub fn rustc_version(&self) -> &str {
- self.rustc_version.as_slice()
- }
+ pub fn rustc_version(&self) -> &str { &self.rustc_version }
/// Return the host platform and default target of rustc
- pub fn rustc_host(&self) -> &str {
- self.rustc_host.as_slice()
- }
+ pub fn rustc_host(&self) -> &str { &self.rustc_host }
pub fn values(&self) -> CargoResult<Ref<HashMap<String, ConfigValue>>> {
if !self.values_loaded.get() {
try!(walk_tree(&self.cwd, |mut file| {
let path = file.path().clone();
let contents = try!(file.read_to_string());
- let table = try!(cargo_toml::parse(contents.as_slice(),
- &path).chain_error(|| {
+ let table = try!(cargo_toml::parse(&contents, &path).chain_error(|| {
human(format!("could not parse TOML configuration in `{}`",
path.display()))
}));
pub fn string(&self) -> CargoResult<(&str, &Path)> {
match *self {
- CV::String(ref s, ref p) => Ok((s.as_slice(), p)),
+ CV::String(ref s, ref p) => Ok((s, p)),
_ => self.expected("string"),
}
}
pub fn list(&self) -> CargoResult<&[(String, Path)]> {
match *self {
- CV::List(ref list, _) => Ok(list.as_slice()),
+ CV::List(ref list, _) => Ok(list),
_ => self.expected("list"),
}
}
};
try!(fs::mkdir_recursive(&file.dir_path(), old_io::USER_DIR));
let contents = File::open(&file).read_to_string().unwrap_or("".to_string());
- let mut toml = try!(cargo_toml::parse(contents.as_slice(), &file));
+ let mut toml = try!(cargo_toml::parse(&contents, &file));
toml.insert(key.to_string(), value.into_toml());
- try!(File::create(&file).write_all(toml::Value::Table(toml).to_string().as_bytes()));
+ let mut out = try!(File::create(&file));
+ try!(out.write_all(toml::Value::Table(toml).to_string().as_bytes()));
Ok(())
}
}
impl Error for ProcessError {
- fn description(&self) -> &str { self.desc.as_slice() }
+ fn description(&self) -> &str { &self.desc }
fn cause(&self) -> Option<&Error> {
self.cause.as_ref().map(|s| s as &Error)
}
}
impl Error for ConcreteCargoError {
- fn description(&self) -> &str { self.description.as_slice() }
+ fn description(&self) -> &str { &self.description }
fn cause(&self) -> Option<&Error> {
self.cause.as_ref().map(|c| {
let e: &Error = &**c; e
}
impl CliError {
- pub fn new<S: Str>(error: S, code: i32) -> CliError {
- let error = human(error.as_slice().to_string());
+ pub fn new(error: &str, code: i32) -> CliError {
+ let error = human(error.to_string());
CliError::from_boxed(error, code)
}
// =============================================================================
// Construction helpers
-pub fn process_error<S: Str>(msg: S,
- cause: Option<IoError>,
- status: Option<&ProcessExit>,
- output: Option<&ProcessOutput>) -> ProcessError {
+pub fn process_error(msg: &str,
+ cause: Option<IoError>,
+ status: Option<&ProcessExit>,
+ output: Option<&ProcessOutput>) -> ProcessError {
let exit = match status {
Some(&ExitStatus(i)) | Some(&ExitSignal(i)) => i.to_string(),
None => "never executed".to_string(),
};
- let mut desc = format!("{} (status={})", msg.as_slice(), exit);
+ let mut desc = format!("{} (status={})", msg, exit);
if let Some(out) = output {
- match str::from_utf8(out.output.as_slice()) {
+ match str::from_utf8(&out.output) {
Ok(s) if s.trim().len() > 0 => {
desc.push_str("\n--- stdout\n");
desc.push_str(s);
}
Ok(..) | Err(..) => {}
}
- match str::from_utf8(out.error.as_slice()) {
+ match str::from_utf8(&out.error) {
Ok(s) if s.trim().len() > 0 => {
desc.push_str("\n--- stderr\n");
desc.push_str(s);
}
}
-pub fn internal_error<S1: Str, S2: Str>(error: S1,
- detail: S2) -> Box<CargoError> {
+pub fn internal_error(error: &str, detail: &str) -> Box<CargoError> {
Box::new(ConcreteCargoError {
- description: error.as_slice().to_string(),
- detail: Some(detail.as_slice().to_string()),
+ description: error.to_string(),
+ detail: Some(detail.to_string()),
cause: None,
is_human: false
})
}
pub fn get_args(&self) -> &[CString] {
- self.args.as_slice()
+ &self.args
}
pub fn cwd(mut self, path: Path) -> ProcessBuilder {
.stdin(InheritFd(0));
let exit = try!(command.status().map_err(|e| {
- process_error(format!("Could not execute process `{}`",
- self.debug_string()),
+ process_error(&format!("Could not execute process `{}`",
+ self.debug_string()),
Some(e), None, None)
}));
if exit.success() {
Ok(())
} else {
- Err(process_error(format!("Process didn't exit successfully: `{}`",
- self.debug_string()),
+ Err(process_error(&format!("Process didn't exit successfully: `{}`",
+ self.debug_string()),
None, Some(&exit), None))
}
}
let command = self.build_command();
let output = try!(command.output().map_err(|e| {
- process_error(format!("Could not execute process `{}`",
- self.debug_string()),
+ process_error(&format!("Could not execute process `{}`",
+ self.debug_string()),
Some(e), None, None)
}));
if output.status.success() {
Ok(output)
} else {
- Err(process_error(format!("Process didn't exit successfully: `{}`",
- self.debug_string()),
+ Err(process_error(&format!("Process didn't exit successfully: `{}`",
+ self.debug_string()),
None, Some(&output.status), Some(&output)))
}
}
command.arg(arg);
}
for (k, v) in self.env.iter() {
- let k = k.as_slice();
match *v {
Some(ref v) => { command.env(k, v); }
None => { command.env_remove(k); }
let mut last = 0;
for (i, &(l, time, ref msg)) in msgs.iter().enumerate() {
if l != lvl { continue }
- println!("{} {:6}ms - {}", repeat(" ").take(lvl + 1).collect::<String>(),
- time / 1000000, msg);
+ println!("{} {:6}ms - {}",
+ repeat(" ").take(lvl + 1).collect::<String>(),
+ time / 1000000, msg);
print(lvl + 1, &msgs[last..i]);
last = i;
}
MESSAGES.with(|msgs_rc| {
let mut msgs = msgs_rc.borrow_mut();
- msgs.push((0, end - start, mem::replace(&mut self.desc, String::new())));
- print(0, msgs.as_slice());
+ msgs.push((0, end - start,
+ mem::replace(&mut self.desc, String::new())));
+ print(0, &msgs);
});
} else {
MESSAGES.with(|msgs| {
}
}
}
+
+impl<'a> ToSemver for &'a String {
+ fn to_semver(self) -> Result<Version, String> {
+ (**self).to_semver()
+ }
+}
add_unused_keys(m, v, if key.len() == 0 {
k.clone()
} else {
- key.clone() + "." + k.as_slice()
+ key.clone() + "." + k
})
}
}
}
pub fn parse(toml: &str, file: &Path) -> CargoResult<toml::Table> {
- let mut parser = toml::Parser::new(toml.as_slice());
+ let mut parser = toml::Parser::new(&toml);
match parser.parse() {
Some(toml) => return Ok(toml),
None => {}
for error in parser.errors.iter() {
let (loline, locol) = parser.to_linecol(error.lo);
let (hiline, hicol) = parser.to_linecol(error.hi);
- error_str.push_str(format!("{}:{}:{}{} {}\n",
- file.display(),
- loline + 1, locol + 1,
- if loline != hiline || locol != hicol {
- format!("-{}:{}", hiline + 1,
- hicol + 1)
- } else {
- "".to_string()
- },
- error.desc).as_slice());
+ error_str.push_str(&format!("{}:{}:{}{} {}\n",
+ file.display(),
+ loline + 1, locol + 1,
+ if loline != hiline || locol != hicol {
+ format!("-{}:{}", hiline + 1,
+ hicol + 1)
+ } else {
+ "".to_string()
+ },
+ error.desc));
}
Err(human(error_str))
}
impl<T> ManyOrOne<T> {
fn as_slice(&self) -> &[T] {
match *self {
- ManyOrOne::Many(ref v) => v.as_slice(),
+ ManyOrOne::Many(ref v) => v,
ManyOrOne::One(ref t) => slice::ref_slice(t),
}
}
impl Decodable for TomlVersion {
fn decode<D: Decoder>(d: &mut D) -> Result<TomlVersion, D::Error> {
let s = try!(d.read_str());
- match s.as_slice().to_semver() {
+ match s.to_semver() {
Ok(s) => Ok(TomlVersion { version: s }),
- Err(e) => Err(d.error(e.as_slice())),
+ Err(e) => Err(d.error(&e)),
}
}
}
impl TomlProject {
pub fn to_package_id(&self, source_id: &SourceId) -> CargoResult<PackageId> {
- PackageId::new(self.name.as_slice(), self.version.version.clone(),
+ PackageId::new(&self.name, self.version.version.clone(),
source_id)
}
}
}
}).collect()
}
- None => inferred_lib_target(project.name.as_slice(), layout),
+ None => inferred_lib_target(&project.name, layout),
};
let bins = match self.bin {
}
}).collect()
}
- None => inferred_bin_targets(project.name.as_slice(), layout)
+ None => inferred_bin_targets(&project.name, layout)
};
let examples = match self.example {
// Get targets
let profiles = self.profile.clone().unwrap_or(Default::default());
- let targets = normalize(lib.as_slice(),
- bins.as_slice(),
+ let targets = normalize(&lib,
+ &bins,
new_build,
- examples.as_slice(),
- tests.as_slice(),
- benches.as_slice(),
+ &examples,
+ &tests,
+ &benches,
&metadata,
&profiles);
let new_source_id = match details.git {
Some(ref git) => {
- let loc = try!(git.as_slice().to_url().map_err(|e| {
+ let loc = try!(git.to_url().map_err(|e| {
human(e)
}));
Some(SourceId::for_git(&loc, reference))
}
None => {
details.path.as_ref().map(|path| {
- cx.nested_paths.push(Path::new(path.as_slice()));
+ cx.nested_paths.push(Path::new(path));
cx.source_id.clone()
})
}
}.unwrap_or(try!(SourceId::for_central(cx.config)));
- let dep = try!(Dependency::parse(n.as_slice(),
+ let dep = try!(Dependency::parse(&n,
details.version.as_ref()
.map(|v| v.as_slice()),
&new_source_id));
impl PathValue {
fn to_path(&self) -> Path {
match *self {
- PathValue::String(ref s) => Path::new(s.as_slice()),
+ PathValue::String(ref s) => Path::new(s),
PathValue::Path(ref p) => p.clone(),
}
}
PathValue::String(format!("src/{}.rs", l.name))
});
let crate_types = l.crate_type.clone().and_then(|kinds| {
- LibKind::from_strs(kinds).ok()
+ kinds.iter().map(|s| LibKind::from_str(s))
+ .collect::<CargoResult<_>>().ok()
}).unwrap_or_else(|| {
vec![if l.plugin == Some(true) {LibKind::Dylib} else {LibKind::Lib}]
});
if profile.is_test() {
metadata.mix(&"test");
}
- dst.push(Target::lib_target(l.name.as_slice(), crate_types.clone(),
+ dst.push(Target::lib_target(&l.name, crate_types.clone(),
&path.to_path(), profile,
metadata));
}
} else {
None
};
- dst.push(Target::bin_target(bin.name.as_slice(),
+ dst.push(Target::bin_target(&bin.name,
&path.to_path(),
profile,
metadata));
let name = format!("build-script-{}", cmd.filestem_str().unwrap_or(""));
for profile in profiles.iter() {
- dst.push(Target::custom_build_target(name.as_slice(),
- cmd, profile, None));
+ dst.push(Target::custom_build_target(&name, cmd, profile, None));
}
}
let profile = merge(Profile::default_example(), &profiles.test);
let profile_release = merge(Profile::default_release(), &profiles.release);
- dst.push(Target::example_target(ex.name.as_slice(),
+ dst.push(Target::example_target(&ex.name,
&path.to_path(),
&profile));
- dst.push(Target::example_target(ex.name.as_slice(),
+ dst.push(Target::example_target(&ex.name,
&path.to_path(),
&profile_release));
}
let profile = Profile::default_test().harness(harness);
let profile = merge(profile, &profiles.test);
- dst.push(Target::test_target(test.name.as_slice(),
+ dst.push(Target::test_target(&test.name,
&path.to_path(),
&profile,
metadata));
let profile = Profile::default_bench().harness(harness);
let profile = merge(profile, &profiles.bench);
- dst.push(Target::bench_target(bench.name.as_slice(),
+ dst.push(Target::bench_target(&bench.name,
&path.to_path(),
&profile,
metadata));
test_targets(&mut ret, tests, metadata, profiles,
|test| {
- if test.name.as_slice() == "test" {
+ if test.name == "test" {
"src/test.rs".to_string()
} else {
format!("tests/{}.rs", test.name)
bench_targets(&mut ret, benches, metadata, profiles,
|bench| {
- if bench.name.as_slice() == "bench" {
+ if bench.name == "bench" {
"src/bench.rs".to_string()
} else {
format!("benches/{}.rs", bench.name)
let body = json::encode(&OwnersReq { users: owners }).unwrap();
let body = try!(self.put(format!("/crates/{}/owners", krate),
body.as_bytes()));
- assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ assert!(json::decode::<R>(&body).unwrap().ok);
Ok(())
}
let body = json::encode(&OwnersReq { users: owners }).unwrap();
let body = try!(self.delete(format!("/crates/{}/owners", krate),
Some(body.as_bytes())));
- assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ assert!(json::decode::<R>(&body).unwrap().ok);
Ok(())
}
pub fn list_owners(&mut self, krate: &str) -> Result<Vec<User>> {
let body = try!(self.get(format!("/crates/{}/owners", krate)));
- Ok(json::decode::<Users>(body.as_slice()).unwrap().users)
+ Ok(json::decode::<Users>(&body).unwrap().users)
}
pub fn publish(&mut self, krate: &NewCrate, tarball: &Path) -> Result<()> {
let header = {
let mut w = MemWriter::new();
w.write_le_u32(json.len() as u32).unwrap();
- w.write_str(json.as_slice()).unwrap();
+ w.write_str(&json).unwrap();
w.write_le_u32(stat.size as u32).unwrap();
MemReader::new(w.into_inner())
};
let url = format!("{}/api/v1/crates/new", self.host);
- let token = try!(self.token.as_ref().ok_or(Error::TokenMissing)).as_slice();
+ let token = try!(self.token.as_ref().ok_or(Error::TokenMissing));
let request = self.handle.put(url, &mut body)
.content_length(size)
.header("Accept", "application/json")
- .header("Authorization", token);
+ .header("Authorization", token);
let response = handle(request.exec());
let _body = try!(response);
Ok(())
}
pub fn search(&mut self, query: &str) -> Result<Vec<Crate>> {
- let body = try!(self.req(format!("/crates?q={}", query), None, Get, Auth::Unauthorized));
+ let body = try!(self.req(format!("/crates?q={}", query), None, Get,
+ Auth::Unauthorized));
- Ok(json::decode::<Crates>(body.as_slice()).unwrap().crates)
+ Ok(json::decode::<Crates>(&body).unwrap().crates)
}
pub fn yank(&mut self, krate: &str, version: &str) -> Result<()> {
let body = try!(self.delete(format!("/crates/{}/{}/yank", krate, version),
None));
- assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ assert!(json::decode::<R>(&body).unwrap().ok);
Ok(())
}
pub fn unyank(&mut self, krate: &str, version: &str) -> Result<()> {
let body = try!(self.put(format!("/crates/{}/{}/unyank", krate, version),
&[]));
- assert!(json::decode::<R>(body.as_slice()).unwrap().ok);
+ assert!(json::decode::<R>(&body).unwrap().ok);
Ok(())
}
.content_type("application/json");
if authorized == Auth::Authorized {
- let token = try!(self.token.as_ref().ok_or(Error::TokenMissing)).as_slice();
- req = req.header("Authorization", token);
+ let token = try!(self.token.as_ref().ok_or(Error::TokenMissing));
+ req = req.header("Authorization", token);
}
match body {
Some(b) => req = req.body(b),
Ok(body) => body,
Err(..) => return Err(Error::NonUtf8Body),
};
- match json::decode::<ApiErrorList>(body.as_slice()) {
+ match json::decode::<ApiErrorList>(&body) {
Ok(errors) => {
return Err(Error::Api(errors.errors.into_iter().map(|s| s.detail)
.collect()))